<!-- +page.svelte — chat page -->
  1. <script lang="ts">
import { v4 as uuidv4 } from 'uuid';
import toast from 'svelte-french-toast';

import { onMount, tick } from 'svelte';
import { goto } from '$app/navigation';
import { page } from '$app/stores';

import { models, modelfiles, user, settings, chats, chatId } from '$lib/stores';

import { generateChatCompletion, generateTitle } from '$lib/apis/ollama';
import { generateOpenAIChatCompletion } from '$lib/apis/openai';
import { createNewChat, getChatById, getChatList, updateChatById } from '$lib/apis/chats';

import { convertMessagesToHistory, copyToClipboard, splitStream } from '$lib/utils';

import MessageInput from '$lib/components/chat/MessageInput.svelte';
import Messages from '$lib/components/chat/Messages.svelte';
import ModelSelector from '$lib/components/chat/ModelSelector.svelte';
import Navbar from '$lib/components/layout/Navbar.svelte';
// True once the chat for the current route has been loaded; gates template rendering.
let loaded = false;
// Set by stopResponse(); polled by the streaming loops to abort generation early.
let stopResponseFlag = false;
// Keep the window pinned to the bottom while tokens stream in.
let autoScroll = true;

// let chatId = $page.params.id;
// Tag names of the models selected in the ModelSelector; '' means "none chosen yet".
let selectedModels = [''];

// The single modelfile backing the selection, or null when zero or several models are selected.
let selectedModelfile = null;
$: selectedModelfile =
	selectedModels.length === 1 &&
	$modelfiles.filter((modelfile) => modelfile.tagName === selectedModels[0]).length > 0
		? $modelfiles.filter((modelfile) => modelfile.tagName === selectedModels[0])[0]
		: null;

// Map of tagName -> modelfile, one entry per selected model that has a modelfile.
let selectedModelfiles = {};
$: selectedModelfiles = selectedModels.reduce((a, tagName, i, arr) => {
	const modelfile =
		$modelfiles.filter((modelfile) => modelfile.tagName === tagName)?.at(0) ?? undefined;

	return {
		...a,
		...(modelfile && { [tagName]: modelfile })
	};
}, {});

// Chat record as returned by the backend (null until loaded).
let chat = null;

let title = '';
let prompt = '';
let files = [];

// Linear view of the active conversation branch; derived from `history` below.
let messages = [];
// Message tree: `messages` maps id -> message (parentId/childrenIds links);
// `currentId` is the id of the active leaf message.
let history = {
	messages: {},
	currentId: null
};

// Rebuild the linear `messages` list by walking parent links from the current leaf to the root.
$: if (history.currentId !== null) {
	let _messages = [];

	let currentMessage = history.messages[history.currentId];
	while (currentMessage !== null) {
		_messages.unshift({ ...currentMessage });
		currentMessage =
			currentMessage.parentId !== null ? history.messages[currentMessage.parentId] : null;
	}
	messages = _messages;
} else {
	messages = [];
}

// Re-load the chat whenever the route's chat id changes; bounce home if loading fails.
$: if ($page.params.id) {
	(async () => {
		if (await loadChat()) {
			await tick();
			loaded = true;
		} else {
			await goto('/');
		}
	})();
}
//////////////////////////
// Web functions
//////////////////////////

// Fetch the chat for the current route id and hydrate component state from it.
// Returns true on success; null/undefined otherwise (both falsy, triggering a redirect home).
const loadChat = async () => {
	await chatId.set($page.params.id);
	chat = await getChatById(localStorage.token, $chatId).catch(async (error) => {
		await goto('/');
		return null;
	});

	if (chat) {
		const chatContent = chat.chat;

		if (chatContent) {
			console.log(chatContent);

			// Older records stored a single `model`; newer ones store a `models` array.
			selectedModels =
				(chatContent?.models ?? undefined) !== undefined
					? chatContent.models
					: [chatContent.model ?? ''];
			// Older records stored a flat `messages` list; convert it to the tree format.
			history =
				(chatContent?.history ?? undefined) !== undefined
					? chatContent.history
					: convertMessagesToHistory(chatContent.messages);
			title = chatContent.title;

			// Per-chat system prompt / options override the locally stored settings.
			let _settings = JSON.parse(localStorage.getItem('settings') ?? '{}');
			await settings.set({
				..._settings,
				system: chatContent.system ?? _settings.system,
				options: chatContent.options ?? _settings.options
			});
			autoScroll = true;
			await tick();

			// Mark the last message done so a previously interrupted stream isn't stuck "typing".
			if (messages.length > 0) {
				history.messages[messages.at(-1).id].done = true;
			}
			await tick();

			return true;
		} else {
			return null;
		}
	}
};
  107. //////////////////////////
  108. // Ollama functions
  109. //////////////////////////
  110. const sendPrompt = async (prompt, parentId) => {
  111. const _chatId = JSON.parse(JSON.stringify($chatId));
  112. await Promise.all(
  113. selectedModels.map(async (model) => {
  114. console.log(model);
  115. const modelTag = $models.filter((m) => m.name === model).at(0);
  116. if (modelTag?.external) {
  117. await sendPromptOpenAI(model, prompt, parentId, _chatId);
  118. } else if (modelTag) {
  119. await sendPromptOllama(model, prompt, parentId, _chatId);
  120. } else {
  121. toast.error(`Model ${model} not found`);
  122. }
  123. })
  124. );
  125. await chats.set(await getChatList(localStorage.token));
  126. };
  127. const sendPromptOllama = async (model, userPrompt, parentId, _chatId) => {
  128. // Create response message
  129. let responseMessageId = uuidv4();
  130. let responseMessage = {
  131. parentId: parentId,
  132. id: responseMessageId,
  133. childrenIds: [],
  134. role: 'assistant',
  135. content: '',
  136. model: model
  137. };
  138. // Add message to history and Set currentId to messageId
  139. history.messages[responseMessageId] = responseMessage;
  140. history.currentId = responseMessageId;
  141. // Append messageId to childrenIds of parent message
  142. if (parentId !== null) {
  143. history.messages[parentId].childrenIds = [
  144. ...history.messages[parentId].childrenIds,
  145. responseMessageId
  146. ];
  147. }
  148. // Wait until history/message have been updated
  149. await tick();
  150. // Scroll down
  151. window.scrollTo({ top: document.body.scrollHeight });
  152. const res = await generateChatCompletion(localStorage.token, {
  153. model: model,
  154. messages: [
  155. $settings.system
  156. ? {
  157. role: 'system',
  158. content: $settings.system
  159. }
  160. : undefined,
  161. ...messages
  162. ]
  163. .filter((message) => message)
  164. .map((message) => ({
  165. role: message.role,
  166. content: message.content,
  167. ...(message.files && {
  168. images: message.files
  169. .filter((file) => file.type === 'image')
  170. .map((file) => file.url.slice(file.url.indexOf(',') + 1))
  171. })
  172. })),
  173. options: {
  174. ...($settings.options ?? {})
  175. },
  176. format: $settings.requestFormat ?? undefined
  177. });
  178. if (res && res.ok) {
  179. const reader = res.body
  180. .pipeThrough(new TextDecoderStream())
  181. .pipeThrough(splitStream('\n'))
  182. .getReader();
  183. while (true) {
  184. const { value, done } = await reader.read();
  185. if (done || stopResponseFlag || _chatId !== $chatId) {
  186. responseMessage.done = true;
  187. messages = messages;
  188. break;
  189. }
  190. try {
  191. let lines = value.split('\n');
  192. for (const line of lines) {
  193. if (line !== '') {
  194. console.log(line);
  195. let data = JSON.parse(line);
  196. if ('detail' in data) {
  197. throw data;
  198. }
  199. if (data.done == false) {
  200. if (responseMessage.content == '' && data.message.content == '\n') {
  201. continue;
  202. } else {
  203. responseMessage.content += data.message.content;
  204. messages = messages;
  205. }
  206. } else {
  207. responseMessage.done = true;
  208. if (responseMessage.content == '') {
  209. responseMessage.error = true;
  210. responseMessage.content =
  211. 'Oops! No text generated from Ollama, Please try again.';
  212. }
  213. responseMessage.context = data.context ?? null;
  214. responseMessage.info = {
  215. total_duration: data.total_duration,
  216. load_duration: data.load_duration,
  217. sample_count: data.sample_count,
  218. sample_duration: data.sample_duration,
  219. prompt_eval_count: data.prompt_eval_count,
  220. prompt_eval_duration: data.prompt_eval_duration,
  221. eval_count: data.eval_count,
  222. eval_duration: data.eval_duration
  223. };
  224. messages = messages;
  225. if ($settings.notificationEnabled && !document.hasFocus()) {
  226. const notification = new Notification(
  227. selectedModelfile
  228. ? `${
  229. selectedModelfile.title.charAt(0).toUpperCase() +
  230. selectedModelfile.title.slice(1)
  231. }`
  232. : `Ollama - ${model}`,
  233. {
  234. body: responseMessage.content,
  235. icon: selectedModelfile?.imageUrl ?? '/favicon.png'
  236. }
  237. );
  238. }
  239. if ($settings.responseAutoCopy) {
  240. copyToClipboard(responseMessage.content);
  241. }
  242. }
  243. }
  244. }
  245. } catch (error) {
  246. console.log(error);
  247. if ('detail' in error) {
  248. toast.error(error.detail);
  249. }
  250. break;
  251. }
  252. if (autoScroll) {
  253. window.scrollTo({ top: document.body.scrollHeight });
  254. }
  255. }
  256. if ($chatId == _chatId) {
  257. chat = await updateChatById(localStorage.token, _chatId, {
  258. messages: messages,
  259. history: history
  260. });
  261. await chats.set(await getChatList(localStorage.token));
  262. }
  263. } else {
  264. if (res !== null) {
  265. const error = await res.json();
  266. console.log(error);
  267. if ('detail' in error) {
  268. toast.error(error.detail);
  269. responseMessage.content = error.detail;
  270. } else {
  271. toast.error(error.error);
  272. responseMessage.content = error.error;
  273. }
  274. } else {
  275. toast.error(`Uh-oh! There was an issue connecting to Ollama.`);
  276. responseMessage.content = `Uh-oh! There was an issue connecting to Ollama.`;
  277. }
  278. responseMessage.error = true;
  279. responseMessage.content = `Uh-oh! There was an issue connecting to Ollama.`;
  280. responseMessage.done = true;
  281. messages = messages;
  282. }
  283. stopResponseFlag = false;
  284. await tick();
  285. if (autoScroll) {
  286. window.scrollTo({ top: document.body.scrollHeight });
  287. }
  288. if (messages.length == 2 && messages.at(1).content !== '') {
  289. window.history.replaceState(history.state, '', `/c/${_chatId}`);
  290. await generateChatTitle(_chatId, userPrompt);
  291. }
  292. };
// Stream a completion for `userPrompt` from an OpenAI-compatible `model`,
// mirroring sendPromptOllama but speaking the OpenAI chat-completions protocol
// (SSE frames). Does nothing unless an API key is present in settings.
const sendPromptOpenAI = async (model, userPrompt, parentId, _chatId) => {
	if ($settings.OPENAI_API_KEY) {
		// NOTE(review): `models` here is the imported store object, which is always
		// truthy — this was presumably meant to be `$models`; confirm and fix.
		if (models) {
			// Placeholder assistant message that streamed tokens are appended to.
			let responseMessageId = uuidv4();
			let responseMessage = {
				parentId: parentId,
				id: responseMessageId,
				childrenIds: [],
				role: 'assistant',
				content: '',
				model: model
			};

			// Register the message as the active leaf of the history tree.
			history.messages[responseMessageId] = responseMessage;
			history.currentId = responseMessageId;
			if (parentId !== null) {
				history.messages[parentId].childrenIds = [
					...history.messages[parentId].childrenIds,
					responseMessageId
				];
			}

			window.scrollTo({ top: document.body.scrollHeight });

			const res = await generateOpenAIChatCompletion(localStorage.token, {
				model: model,
				stream: true,
				messages: [
					$settings.system
						? {
								role: 'system',
								content: $settings.system
						  }
						: undefined,
					...messages
				]
					.filter((message) => message)
					.map((message) => ({
						role: message.role,
						// Messages with image attachments use the multi-part content format.
						...(message.files
							? {
									content: [
										{
											type: 'text',
											text: message.content
										},
										...message.files
											.filter((file) => file.type === 'image')
											.map((file) => ({
												type: 'image_url',
												image_url: {
													url: file.url
												}
											}))
									]
							  }
							: { content: message.content })
					})),
				// Map Ollama-style option names onto OpenAI request parameters.
				seed: $settings?.options?.seed ?? undefined,
				stop: $settings?.options?.stop ?? undefined,
				temperature: $settings?.options?.temperature ?? undefined,
				top_p: $settings?.options?.top_p ?? undefined,
				num_ctx: $settings?.options?.num_ctx ?? undefined,
				frequency_penalty: $settings?.options?.repeat_penalty ?? undefined,
				max_tokens: $settings?.options?.num_predict ?? undefined
			});

			if (res && res.ok) {
				const reader = res.body
					.pipeThrough(new TextDecoderStream())
					.pipeThrough(splitStream('\n'))
					.getReader();

				while (true) {
					const { value, done } = await reader.read();
					// Stop on stream end, user stop request, or navigation to another chat.
					if (done || stopResponseFlag || _chatId !== $chatId) {
						responseMessage.done = true;
						messages = messages;
						break;
					}

					try {
						let lines = value.split('\n');

						for (const line of lines) {
							if (line !== '') {
								console.log(line);
								if (line === 'data: [DONE]') {
									responseMessage.done = true;
									messages = messages;
								} else {
									// SSE frames are prefixed with "data: "; strip before parsing.
									let data = JSON.parse(line.replace(/^data: /, ''));
									console.log(data);

									// Skip a leading blank line so the bubble doesn't start empty.
									if (responseMessage.content == '' && data.choices[0].delta.content == '\n') {
										continue;
									} else {
										responseMessage.content += data.choices[0].delta.content ?? '';
										messages = messages;
									}
								}
							}
						}
					} catch (error) {
						console.log(error);
					}

					// NOTE(review): notification and auto-copy run on EVERY chunk here,
					// whereas the Ollama path runs them once on completion — confirm intent.
					if ($settings.notificationEnabled && !document.hasFocus()) {
						const notification = new Notification(`OpenAI ${model}`, {
							body: responseMessage.content,
							icon: '/favicon.png'
						});
					}

					if ($settings.responseAutoCopy) {
						copyToClipboard(responseMessage.content);
					}

					if (autoScroll) {
						window.scrollTo({ top: document.body.scrollHeight });
					}
				}

				// Persist the updated transcript unless the user switched chats mid-stream.
				if ($chatId == _chatId) {
					chat = await updateChatById(localStorage.token, _chatId, {
						messages: messages,
						history: history
					});
					await chats.set(await getChatList(localStorage.token));
				}
			} else {
				// Surface the most specific error message available.
				if (res !== null) {
					const error = await res.json();
					console.log(error);
					if ('detail' in error) {
						toast.error(error.detail);
						responseMessage.content = error.detail;
					} else {
						if ('message' in error.error) {
							toast.error(error.error.message);
							responseMessage.content = error.error.message;
						} else {
							toast.error(error.error);
							responseMessage.content = error.error;
						}
					}
				} else {
					toast.error(`Uh-oh! There was an issue connecting to ${model}.`);
					responseMessage.content = `Uh-oh! There was an issue connecting to ${model}.`;
				}

				responseMessage.error = true;
				// NOTE(review): this unconditionally overwrites the specific error message
				// captured just above with a generic one — likely unintended.
				responseMessage.content = `Uh-oh! There was an issue connecting to ${model}.`;
				responseMessage.done = true;
				messages = messages;
			}

			stopResponseFlag = false;
			await tick();

			if (autoScroll) {
				window.scrollTo({ top: document.body.scrollHeight });
			}

			// First exchange in a new chat: rewrite the URL and title it after the prompt.
			if (messages.length == 2) {
				window.history.replaceState(history.state, '', `/c/${_chatId}`);
				await setChatTitle(_chatId, userPrompt);
			}
		}
	}
};
  448. const submitPrompt = async (userPrompt) => {
  449. console.log('submitPrompt', $chatId);
  450. if (selectedModels.includes('')) {
  451. toast.error('Model not selected');
  452. } else if (messages.length != 0 && messages.at(-1).done != true) {
  453. // Response not done
  454. console.log('wait');
  455. } else {
  456. // Reset chat message textarea height
  457. document.getElementById('chat-textarea').style.height = '';
  458. // Create user message
  459. let userMessageId = uuidv4();
  460. let userMessage = {
  461. id: userMessageId,
  462. parentId: messages.length !== 0 ? messages.at(-1).id : null,
  463. childrenIds: [],
  464. role: 'user',
  465. content: userPrompt,
  466. files: files.length > 0 ? files : undefined
  467. };
  468. // Add message to history and Set currentId to messageId
  469. history.messages[userMessageId] = userMessage;
  470. history.currentId = userMessageId;
  471. // Append messageId to childrenIds of parent message
  472. if (messages.length !== 0) {
  473. history.messages[messages.at(-1).id].childrenIds.push(userMessageId);
  474. }
  475. // Wait until history/message have been updated
  476. await tick();
  477. // Create new chat if only one message in messages
  478. if (messages.length == 1) {
  479. chat = await createNewChat(localStorage.token, {
  480. id: $chatId,
  481. title: 'New Chat',
  482. models: selectedModels,
  483. system: $settings.system ?? undefined,
  484. options: {
  485. ...($settings.options ?? {})
  486. },
  487. messages: messages,
  488. history: history,
  489. timestamp: Date.now()
  490. });
  491. await chats.set(await getChatList(localStorage.token));
  492. await chatId.set(chat.id);
  493. await tick();
  494. }
  495. // Reset chat input textarea
  496. prompt = '';
  497. files = [];
  498. // Send prompt
  499. await sendPrompt(userPrompt, userMessageId);
  500. }
  501. };
  502. const stopResponse = () => {
  503. stopResponseFlag = true;
  504. console.log('stopResponse');
  505. };
  506. const regenerateResponse = async () => {
  507. console.log('regenerateResponse');
  508. if (messages.length != 0 && messages.at(-1).done == true) {
  509. messages.splice(messages.length - 1, 1);
  510. messages = messages;
  511. let userMessage = messages.at(-1);
  512. let userPrompt = userMessage.content;
  513. await sendPrompt(userPrompt, userMessage.id);
  514. }
  515. };
  516. const generateChatTitle = async (_chatId, userPrompt) => {
  517. if ($settings.titleAutoGenerate ?? true) {
  518. const title = await generateTitle(localStorage.token, selectedModels[0], userPrompt);
  519. if (title) {
  520. await setChatTitle(_chatId, title);
  521. }
  522. } else {
  523. await setChatTitle(_chatId, `${userPrompt}`);
  524. }
  525. };
  526. const setChatTitle = async (_chatId, _title) => {
  527. if (_chatId === $chatId) {
  528. title = _title;
  529. }
  530. chat = await updateChatById(localStorage.token, _chatId, { title: _title });
  531. await chats.set(await getChatList(localStorage.token));
  532. };
  533. </script>
<!-- Track scroll position: auto-scroll stays on only while near the bottom. -->
<svelte:window
	on:scroll={(e) => {
		autoScroll = window.innerHeight + window.scrollY >= document.body.offsetHeight - 40;
	}}
/>

{#if loaded}
	<Navbar
		{title}
		shareEnabled={messages.length > 0}
		initNewChat={() => {
			goto('/');
		}}
	/>
	<div class="min-h-screen w-full flex justify-center">
		<div class=" py-2.5 flex flex-col justify-between w-full">
			<!-- Model picker is locked once the conversation has started. -->
			<div class="max-w-2xl mx-auto w-full px-3 md:px-0 mt-10">
				<ModelSelector bind:selectedModels disabled={messages.length > 0} />
			</div>

			<div class=" h-full mt-10 mb-32 w-full flex flex-col">
				<Messages
					chatId={$chatId}
					{selectedModels}
					{selectedModelfiles}
					bind:history
					bind:messages
					bind:autoScroll
					bottomPadding={files.length > 0}
					{sendPrompt}
					{regenerateResponse}
				/>
			</div>
		</div>

		<!-- Prompt input; falls back to default suggestions when the modelfile has none. -->
		<MessageInput
			bind:files
			bind:prompt
			bind:autoScroll
			suggestionPrompts={selectedModelfile?.suggestionPrompts ?? [
				{
					title: ['Help me study', 'vocabulary for a college entrance exam'],
					content: `Help me study vocabulary: write a sentence for me to fill in the blank, and I'll try to pick the correct option.`
				},
				{
					title: ['Give me ideas', `for what to do with my kids' art`],
					content: `What are 5 creative things I could do with my kids' art? I don't want to throw them away, but it's also so much clutter.`
				},
				{
					title: ['Tell me a fun fact', 'about the Roman Empire'],
					content: 'Tell me a random fun fact about the Roman Empire'
				},
				{
					title: ['Show me a code snippet', `of a website's sticky header`],
					content: `Show me a code snippet of a website's sticky header in CSS and JavaScript.`
				}
			]}
			{messages}
			{submitPrompt}
			{stopResponse}
		/>
	</div>
{/if}